import numpy as np
import tensorflow.compat.v2 as tf
tf.enable_v2_behavior()
import pandas as pd
from tensorflow import keras
from sklearn.preprocessing import StandardScaler
from sklearn.preprocessing import RobustScaler
from sklearn.preprocessing import MinMaxScaler
from matplotlib import pyplot
import plotly.graph_objects as go
import math
import seaborn as sns
from sklearn.metrics import mean_squared_error
np.random.seed(1)
tf.random.set_seed(1)
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, LSTM, GRU, Dropout, RepeatVector, TimeDistributed
from keras import backend
# --- Experiment configuration ------------------------------------------------
# Path (without extension) where the trained model is saved at the end.
MODELFILENAME = 'MODELS/LSTM_1d_TFM_2c'
# Window length: 144 samples = 1 day (presumably 10-minute sampling -- TODO confirm).
TIME_STEPS=144 #1d
# Recurrent cell class used for both layers (LSTM here; GRU is also imported).
CMODEL = LSTM
# Hidden units per recurrent layer.
UNITS=43
# Dropout rates applied after the first and second recurrent layers.
DROPOUT1=0.405
DROPOUT2=0.331
# Activation of the per-timestep output Dense layer.
ACTIVATION='tanh'
# Optimizer and training schedule.
OPTIMIZER='adadelta'
EPOCHS=56
BATCHSIZE=11
VALIDATIONSPLIT=0.1
# Code to read csv file into Colaboratory:
# from google.colab import files
# uploaded = files.upload()
# import io
# df = pd.read_csv(io.BytesIO(uploaded['SentDATA.csv']))
# Dataset is now stored in a Pandas Dataframe

# Load the sensor dataset and use the 'Time' column as a DatetimeIndex.
df = pd.read_csv('../../data/dadesTFM.csv')
df.reset_index(inplace=True)
df['Time'] = pd.to_datetime(df['Time'])
df = df.set_index('Time')

# Particulate-matter feature columns used throughout the analysis.
columns = ['PM1','PM25','PM10','PM1ATM','PM25ATM','PM10ATM']

# Rename the columns to identifier-friendly names and cast the measurements
# to float32.  (The original code renamed the copy and then re-assigned each
# column from `df` one by one; a single rename + astype is equivalent.)
df1 = df.rename(columns={"PM 1": "PM1", "PM 2.5": "PM25", "PM 10": "PM10",
                         "PM 1 ATM": "PM1ATM", "PM 2.5 ATM": "PM25ATM",
                         "PM 10 ATM": "PM10ATM"})
df1[columns] = df1[columns].astype(np.float32)
df2 = df1.copy()

# Chronological 80/20 train/test split -- time-series data must not be shuffled.
train_size = int(len(df2) * 0.8)
test_size = len(df2) - train_size
# .copy() gives each split its own frame so that the in-place per-column
# scaling below does not trigger pandas' SettingWithCopyWarning.
train = df2.iloc[0:train_size].copy()
test = df2.iloc[train_size:len(df2)].copy()
# Shapes observed in the original run: train (3117, 7), test (780, 7).

# Standardize each feature column of the training set independently.
# NOTE(review): a fresh scaler is created per column and not retained, so the
# training-set transform cannot be inverted later -- confirm this is intended.
for col in columns:
    scaler = StandardScaler()
    train[col] = scaler.fit_transform(train[[col]])
# Notebook output: pandas raised SettingWithCopyWarning (once per column) for
# `train[col] = scaler.fit_transform(train[[col]])`, because `train` was a
# slice of df2.  See:
# https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
def create_sequences(X, y, time_steps=TIME_STEPS):
    """Slice X into overlapping windows of `time_steps` rows.

    Returns (samples, targets): samples[i] contains X rows i..i+time_steps-1
    and targets[i] is the y value at position i+time_steps, i.e. the value
    immediately after the window.
    """
    windows, targets = [], []
    n_windows = len(X) - time_steps
    for start in range(n_windows):
        stop = start + time_steps
        windows.append(X.iloc[start:stop].values)
        targets.append(y.iloc[stop])
    return np.array(windows), np.array(targets)
# Build the windowed training sequences for the PM2.5 column only
# (test sequences are built later, per column, in the evaluation loop).
target_col = columns[1]
X_train, y_train = create_sequences(train[[target_col]], train[target_col])
print(f'X_train shape: {X_train.shape}')
print(f'y_train shape: {y_train.shape}')
# Notebook output: X_train shape: (2973, 144, 1); y_train shape: (2973,)
# Add an extra metric: root-mean-squared error, built from Keras backend ops
# so it can be tracked during training next to 'mae' and 'mse'.
def rmse(y_true, y_pred):
    """Per-sample RMSE over the last axis, usable as a Keras metric."""
    squared_error = backend.square(y_pred - y_true)
    return backend.sqrt(backend.mean(squared_error, axis=-1))
# Two-layer recurrent model (CMODEL is LSTM; see configuration at top of file).
model = Sequential()
# First recurrent layer; input is one (TIME_STEPS, n_features) window per sample.
model.add(CMODEL(units = UNITS, return_sequences=True, input_shape=(X_train.shape[1], X_train.shape[2])))
model.add(Dropout(rate=DROPOUT1))
# Second recurrent layer, still returning the full sequence.
model.add(CMODEL(units = UNITS, return_sequences=True))
model.add(Dropout(rate=DROPOUT2))
# One scalar output per timestep: final output shape is (batch, TIME_STEPS, 1).
model.add(TimeDistributed(Dense(1,kernel_initializer='normal',activation=ACTIVATION)))
# NOTE(review): the model emits a full sequence (batch, TIME_STEPS, 1) while
# y_train is a single scalar per window (batch,).  Keras will broadcast the
# target across timesteps when computing the 'mae' loss -- confirm this is
# intended (it is at least consistent with the reconstruction-style
# thresholding performed later in the file).
model.compile(optimizer=OPTIMIZER, loss='mae',metrics=['mse',rmse])
model.summary()
# Notebook output -- model.summary():
#   lstm (LSTM)                    (None, 144, 43)    7,740 params
#   dropout (Dropout)              (None, 144, 43)        0
#   lstm_1 (LSTM)                  (None, 144, 43)   14,964 params
#   dropout_1 (Dropout)            (None, 144, 43)        0
#   time_distributed (Dense)       (None, 144, 1)        44 params
#   Total params: 22,748 (all trainable)
# Train with early stopping on the validation loss; shuffle=False preserves
# the chronological order of the windows.
early_stop = keras.callbacks.EarlyStopping(monitor='val_loss', patience=5, mode='min')
history = model.fit(
    X_train,
    y_train,
    epochs=EPOCHS,
    batch_size=BATCHSIZE,
    validation_split=VALIDATIONSPLIT,
    callbacks=[early_stop],
    shuffle=False,
)
# Notebook output -- training log (all 56 epochs ran; EarlyStopping never
# fired since val_loss decreased monotonically):
#   epoch 1:  loss 0.8142, mse 1.0557, rmse 0.8144, val_loss 0.9256
#   epoch 56: loss 0.6923, mse 0.8168, rmse 0.7147, val_loss 0.5773,
#             val_mse 0.3654, val_rmse 0.5851
import matplotlib.pyplot as plt

# Training-vs-validation curves for every metric tracked during fit().
_history_curves = [
    ('loss', 'MAE Training loss'),
    ('val_loss', 'MAE Validation loss'),
    ('mse', 'MSE Training loss'),
    ('val_mse', 'MSE Validation loss'),
    ('rmse', 'RMSE Training loss'),
    ('val_rmse', 'RMSE Validation loss'),
]
for metric_key, metric_label in _history_curves:
    plt.plot(history.history[metric_key], label=metric_label)
plt.legend();
# Reconstruction error on the training windows: per-window MAE between the
# model's sequence output and the input window itself.
X_train_pred = model.predict(X_train, verbose=0)
train_mae_loss = np.abs(X_train_pred - X_train).mean(axis=1)

# Distribution of the training reconstruction error.
plt.hist(train_mae_loss, bins=50)
plt.xlabel('Train MAE loss')
plt.ylabel('Number of Samples');
def evaluate_prediction(predictions, actual, model_name):
    """Print and return the (MAE, RMSE, MSE) of `predictions` vs. `actual`."""
    diff = predictions - actual
    mse = np.mean(diff ** 2)
    rmse = np.sqrt(mse)
    mae = np.mean(np.abs(diff))
    print(model_name + ':')
    print('Mean Absolute Error: {:.4f}'.format(mae))
    print('Root Mean Square Error: {:.4f}'.format(rmse))
    print('Mean Square Error: {:.4f}'.format(mse))
    print('')
    return mae, rmse, mse
# Evaluate the reconstruction on the training set and persist the model.
# NOTE(review): this rebinds the module-level name `rmse` (previously the
# Keras metric function) to a float.  Harmless after the model is compiled,
# but confusing -- consider distinct names.
mae,rmse,mse = evaluate_prediction(X_train_pred, X_train,"LSTM")
# Original run: LSTM: MAE 0.5799, RMSE 0.7813, MSE 0.6104
model.save(MODELFILENAME+'.h5')
# Threshold computation for the test reconstruction errors.
def calculate_threshold(X_test, X_test_pred):
    """Return the anomaly threshold for a set of reconstruction errors.

    Computes the per-row RMSE between `X_test_pred` and `X_test`, sorts the
    distances ascending, and returns the distance at the 85% position as the
    cutoff.  (The original inline note said 0.80, but the code has always
    used 0.85 -- the comment was wrong, not the code.)
    """
    distance = np.sqrt(np.mean(np.square(X_test_pred - X_test), axis=1))
    # Sorting the scores and picking the value at the 85th-percentile index.
    distance.sort()
    cut_off = int(0.85 * len(distance))
    return distance[cut_off]
# Per-column evaluation and anomaly detection on the test split.  For every
# PM column: standardize, build windowed sequences, evaluate the model,
# compute a reconstruction-error threshold, and plot the detected anomalies.
# NOTE(review): each scaler is fit on the *test* data itself rather than
# reusing the training-set scaler -- confirm this is the intended protocol.
for col in columns:
    print ("####################### "+col +" ###########################")
    # Standardize the test data for this column (fit on test -- see note above).
    # NOTE(review): this assignment raised SettingWithCopyWarning in the
    # original run because `test` was a slice of df2; ensure `test` is a copy.
    scaler = StandardScaler()
    test[col] = scaler.fit_transform(test[[col]])
    # Build the sliding-window sequences for this column.
    X_test1, y_test1 = create_sequences(test[[col]], test[col])
    print(f'Testing shape: {X_test1.shape}')
    # Evaluate the model (loss/mse/rmse).  Renamed from `eval`, which
    # shadowed the Python builtin.
    eval_metrics = model.evaluate(X_test1, y_test1)
    print("evaluate: ",eval_metrics)
    # Predict and report reconstruction-error statistics.
    X_test1_pred = model.predict(X_test1, verbose=0)
    evaluate_prediction(X_test1_pred, X_test1,"LSTM")
    # Per-window MAE and RMSE reconstruction losses.
    test1_mae_loss = np.mean(np.abs(X_test1_pred - X_test1), axis=1)
    test1_rmse_loss = np.sqrt(np.mean(np.square(X_test1_pred - X_test1),axis=1))
    # Flatten (windows, time_steps, features) -> (windows*time_steps, features)
    # so calculate_threshold sees one error per timestep row.
    X_test1_predReshape = X_test1_pred.reshape((X_test1_pred.shape[0] * X_test1_pred.shape[1]), X_test1_pred.shape[2])
    X_test1Reshape = X_test1.reshape((X_test1.shape[0] * X_test1.shape[1]), X_test1.shape[2])
    threshold_test = calculate_threshold(X_test1Reshape,X_test1_predReshape)
    # Score dataframe: one row per window end (the first TIME_STEPS rows of
    # the test set have no window and are skipped).
    test1_score_df = pd.DataFrame(test[TIME_STEPS:])
    test1_score_df['loss'] = test1_rmse_loss.reshape((-1))
    test1_score_df['threshold'] = threshold_test
    test1_score_df['anomaly'] = test1_score_df['loss'] > test1_score_df['threshold']
    test1_score_df[col] = test[TIME_STEPS:][col]
    # Plot the test loss against the threshold.
    fig = go.Figure()
    fig.add_trace(go.Scatter(x=test1_score_df.index, y=test1_score_df['loss'], name='Test loss'))
    fig.add_trace(go.Scatter(x=test1_score_df.index, y=test1_score_df['threshold'], name='Threshold'))
    fig.update_layout(showlegend=True, title='Test loss vs. Threshold')
    fig.show()
    # Collect the rows flagged as anomalous.
    anomalies1 = test1_score_df.loc[test1_score_df['anomaly'] == True]
    print('anomalies: ',anomalies1.shape); print();
    # Plot the inverse-transformed series with the anomalies marked, to check
    # that the normalization did not distort the data.
    # NOTE(review): newer scikit-learn versions require 2-D input to
    # inverse_transform; passing a Series works only on older versions -- verify.
    fig = go.Figure()
    fig.add_trace(go.Scatter(x=test1_score_df.index, y=scaler.inverse_transform(test1_score_df[col]), name=col))
    fig.add_trace(go.Scatter(x=anomalies1.index, y=scaler.inverse_transform(anomalies1[col]), mode='markers', name='Anomaly'))
    fig.update_layout(showlegend=True, title='Detected anomalies')
    fig.show()
    print ("######################################################")
# --- Notebook output (summarized) --------------------------------------------
# For each column the loop printed Testing shape (636, 144, 1), pandas raised
# SettingWithCopyWarning for `test[col] = scaler.fit_transform(test[[col]])`,
# and the evaluation / anomaly results were:
#   PM1:     evaluate [loss 0.6751, mse 0.9417, rmse 0.7034]; MAE 0.5099,
#            RMSE 0.7736, MSE 0.5984; anomalies (290, 10)
#   PM25:    evaluate [0.7108, 1.0219, 0.7390]; MAE 0.5353, RMSE 0.7778,
#            MSE 0.6050; anomalies (178, 10)
#   PM10:    evaluate [0.7378, 1.0737, 0.7659]; MAE 0.5576, RMSE 0.7818,
#            MSE 0.6112; anomalies (171, 10)
#   PM1ATM:  evaluate [0.7409, 0.9767, 0.7690]; MAE 0.5689, RMSE 0.7826,
#            MSE 0.6124; anomalies (188, 10)
#   PM25ATM: evaluate [0.7298, 0.9600, 0.7584]; MAE 0.5607, RMSE 0.7787,
#            MSE 0.6063; anomalies (204, 10)
#   PM10ATM: evaluate [0.7232, 0.9698, 0.7511]; MAE 0.5496, RMSE 0.7747,
#            MSE 0.6001; anomalies (171, 10)